Operating on HBase from Spark


http://www.cnblogs.com/weijueye/p/4957989.html

import org.apache.hadoop.hbase.client.{ConnectionFactory, Scan}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}

/**
 * Created by zzg on 15-12-25.
 */
object HBaseTest {
  def main(args: Array[String]): Unit = {
    // Build an HBase configuration; hbase-site.xml on the classpath supplies
    // the ZooKeeper quorum and other connection settings.
    val hBaseConf = HBaseConfiguration.create()

    try {
      val connection = ConnectionFactory.createConnection(hBaseConf)
      val table = connection.getTable(TableName.valueOf("test"))
      val scan = new Scan()
      val resultScanner = table.getScanner(scan)

      // Iterate over the scan results and print each row key.
      var res = resultScanner.next()
      while (res != null) {
        val key = Bytes.toString(res.getRow)
        println("ok")
        println(key)
        res = resultScanner.next()
      }

      // Release client resources.
      resultScanner.close()
      table.close()
      connection.close()
    } catch {
      case e: Exception => e.printStackTrace()
    }
  }
}
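
The code above uses only the plain HBase client API and does not touch Spark itself. Below is a minimal sketch of scanning the same "test" table as a Spark RDD via TableInputFormat, assuming the HBase MapReduce jars are on the classpath and hbase-site.xml is available; the object name SparkHBaseScan and the app name are illustrative, not from the original post.

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Result
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkConf, SparkContext}

object SparkHBaseScan {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("SparkHBaseScan")
    val sc = new SparkContext(sparkConf)

    // Tell TableInputFormat which table to scan; hbase-site.xml on the
    // classpath provides the ZooKeeper quorum and other settings.
    val hBaseConf = HBaseConfiguration.create()
    hBaseConf.set(TableInputFormat.INPUT_TABLE, "test")

    // Each record is a (row key, Result) pair, one per HBase row.
    val hBaseRDD = sc.newAPIHadoopRDD(
      hBaseConf,
      classOf[TableInputFormat],
      classOf[ImmutableBytesWritable],
      classOf[Result])

    // Count the rows, then print each row key on the driver.
    println("row count: " + hBaseRDD.count())
    hBaseRDD.map { case (key, _) => Bytes.toString(key.get()) }
            .collect()
            .foreach(println)

    sc.stop()
  }
}

Mapping each record to a plain String before collect() avoids shipping the non-serializable Result objects back to the driver.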